import numpy as np
import cv2
import glob
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import glob
import pickle
from moviepy.editor import VideoFileClip
from IPython.display import HTML
# %matplotlib qt
%matplotlib inline
# get images
images = glob.glob('test_images/*.jpg')
# get mtx, dist
# NOTE(review): mtx/dist are presumably the camera matrix and distortion
# coefficients from cv2.calibrateCamera — confirm against the calibration step.
dist_pickle = pickle.load( open( "examples/wide_dist_pickle.p", "rb" ) )
mtx = dist_pickle["mtx"]
dist = dist_pickle["dist"]
# Frame size is taken from the first test image; all frames are assumed equal.
h, w = mpimg.imread(images[0]).shape[:2]
print("All images, video format: {}x{}".format(w, h))
# Source quadrilateral: the road trapezoid in the camera view.
src = np.float32([(585, 460),
(697, 460),
(1044, 690),
(259, 690)])
# Destination rectangle: where the trapezoid lands in the bird's-eye view.
dst = np.float32([(320, 0),
(960, 0),
(960, h),
(320, h)])
print("loaded images")
def cal_undistort(img):
    """Undo lens distortion on *img* using the camera calibration loaded above.

    Relies on the module-level ``mtx`` (camera matrix) and ``dist``
    (distortion coefficients) read from the calibration pickle.
    """
    # Passing mtx again as the new camera matrix keeps the original framing.
    return cv2.undistort(img, mtx, dist, None, mtx)
def warper(img, src, dst):
    """Perspective-warp *img* so the quad *src* maps onto the quad *dst*.

    Returns the warped image at the same width/height as the input.
    """
    height, width = img.shape[:2]
    transform = cv2.getPerspectiveTransform(src, dst)
    # INTER_NEAREST keeps binary masks strictly 0/1 after warping.
    return cv2.warpPerspective(img, transform, (width, height), flags=cv2.INTER_NEAREST)
print("Distord functions loaded")
## Gradient functions
def abs_sobel_thresh(img, orient='x', sobel_kernel=3, thresh=(0, 255)):
    """Threshold the absolute Sobel gradient of an RGB image.

    Parameters:
        img: RGB image array.
        orient: 'x' or 'y' — derivative direction.
        sobel_kernel: odd Sobel aperture size.
        thresh: (low, high) inclusive bounds on the 0-255 rescaled gradient.

    Returns a uint8 binary mask (1 where the threshold is met).
    Raises ValueError for an unknown *orient*.
    """
    gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
    # BUG FIX: sobel_kernel was previously ignored (ksize was never passed).
    if orient == 'x':
        abs_sobel = np.absolute(cv2.Sobel(gray, cv2.CV_64F, 1, 0, ksize=sobel_kernel))
    elif orient == 'y':
        abs_sobel = np.absolute(cv2.Sobel(gray, cv2.CV_64F, 0, 1, ksize=sobel_kernel))
    else:
        # Previously an unknown orient crashed with UnboundLocalError.
        raise ValueError("orient must be 'x' or 'y', got {!r}".format(orient))
    # Rescale to 8 bit; guard against an all-zero gradient (flat image),
    # which previously divided by zero.
    peak = np.max(abs_sobel)
    if peak == 0:
        return np.zeros_like(gray)
    scaled_sobel = np.uint8(255 * abs_sobel / peak)
    # Inclusive (>=, <=) thresholds.
    grad_binary = np.zeros_like(scaled_sobel)
    grad_binary[(scaled_sobel >= thresh[0]) & (scaled_sobel <= thresh[1])] = 1
    return grad_binary
def mag_thresh(img, sobel_kernel=3, mag_thresh=(0, 255)):
    """Threshold the Sobel gradient magnitude of an RGB image.

    NOTE: the ``mag_thresh`` parameter shadows the function name; the name is
    kept because existing callers pass it as a keyword argument.

    Returns a uint8 binary mask (1 where the rescaled magnitude is in range).
    """
    gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
    sobelx = cv2.Sobel(gray, cv2.CV_64F, 1, 0, ksize=sobel_kernel)
    sobely = cv2.Sobel(gray, cv2.CV_64F, 0, 1, ksize=sobel_kernel)
    gradmag = np.sqrt(sobelx**2 + sobely**2)
    # BUG FIX: guard against division by zero on a flat (zero-gradient) image.
    scale_factor = np.max(gradmag) / 255
    if scale_factor == 0:
        return np.zeros_like(gray)
    gradmag = (gradmag / scale_factor).astype(np.uint8)
    mag_binary = np.zeros_like(gradmag)
    mag_binary[(gradmag >= mag_thresh[0]) & (gradmag <= mag_thresh[1])] = 1
    return mag_binary
def dir_threshold(img, sobel_kernel=3, thresh=(0, np.pi/2)):
    """Binary mask of pixels whose gradient direction lies within *thresh* (radians)."""
    gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
    grad_x = cv2.Sobel(gray, cv2.CV_64F, 1, 0, ksize=sobel_kernel)
    grad_y = cv2.Sobel(gray, cv2.CV_64F, 0, 1, ksize=sobel_kernel)
    # arctan2 of the absolute gradients maps directions into [0, pi/2].
    direction = np.arctan2(np.absolute(grad_y), np.absolute(grad_x))
    in_range = (direction >= thresh[0]) & (direction <= thresh[1])
    # Float mask, matching the dtype of the direction array.
    dir_binary = np.zeros_like(direction)
    dir_binary[in_range] = 1
    return dir_binary
def sobel_combined(img, ksize=3):
    """Combine x/y gradient, magnitude and direction thresholds into one mask.

    A pixel is kept when both directional gradients fire, or when both the
    magnitude and the direction thresholds fire.
    """
    grad_x = abs_sobel_thresh(img, orient='x', sobel_kernel=ksize, thresh=(20, 100))
    grad_y = abs_sobel_thresh(img, orient='y', sobel_kernel=ksize, thresh=(20, 100))
    mag_bin = mag_thresh(img, sobel_kernel=ksize, mag_thresh=(30, 100))
    dir_bin = dir_threshold(img, sobel_kernel=ksize, thresh=(0, np.pi/2))
    keep = ((grad_x == 1) & (grad_y == 1)) | ((mag_bin == 1) & (dir_bin == 1))
    combined = np.zeros_like(dir_bin)
    combined[keep] = 1
    return combined
print("Sobel functions loaded")
## Color Functions
# RGB binaries
def binary_img(img, thresh=(180, 255)):
    """Grayscale threshold: 1 where the gray value is in (thresh[0], thresh[1]]."""
    low, high = thresh
    gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
    return ((gray > low) & (gray <= high)).astype(gray.dtype)
def r_binary(img, thresh=(200, 255)):
    """Binary mask of the red channel: 1 where R is in (thresh[0], thresh[1]]."""
    low, high = thresh
    red = img[:, :, 0]
    return ((red > low) & (red <= high)).astype(red.dtype)
def g_binary(img, thresh=(200, 255)):
    """Binary mask of the green channel: 1 where G is in (thresh[0], thresh[1]]."""
    low, high = thresh
    green = img[:, :, 1]
    return ((green > low) & (green <= high)).astype(green.dtype)
def b_binary(img, thresh=(200, 255)):
    """Binary mask of the blue channel: 1 where B is in (thresh[0], thresh[1]]."""
    low, high = thresh
    blue = img[:, :, 2]
    return ((blue > low) & (blue <= high)).astype(blue.dtype)
# HLS binaries
def h_binary(img, thresh=(15, 100)):
    """Binary mask of the HLS hue channel: 1 where H is in (thresh[0], thresh[1]]."""
    low, high = thresh
    hue = cv2.cvtColor(img, cv2.COLOR_RGB2HLS)[:, :, 0]
    return ((hue > low) & (hue <= high)).astype(hue.dtype)
def l_binary(img, thresh=(215, 255)):
    """Binary mask of the HLS lightness channel: 1 where L is in (thresh[0], thresh[1]]."""
    low, high = thresh
    lightness = cv2.cvtColor(img, cv2.COLOR_RGB2HLS)[:, :, 1]
    return ((lightness > low) & (lightness <= high)).astype(lightness.dtype)
def s_binary(img, thresh=(215, 255)):
    """Binary mask of the HLS saturation channel: 1 where S is in (thresh[0], thresh[1]]."""
    low, high = thresh
    saturation = cv2.cvtColor(img, cv2.COLOR_RGB2HLS)[:, :, 2]
    return ((saturation > low) & (saturation <= high)).astype(saturation.dtype)
print('Color functions loaded')
## Pipeline
def pipeline(img, s_thresh=(170, 255), sx_thresh=(20, 100)):
    """Combined gradient + color threshold pipeline for an RGB frame.

    Parameters:
        img: RGB image.
        s_thresh: (low, high) bounds on the HLS saturation channel.
        sx_thresh: (low, high) bounds on the rescaled x-gradient of L.

    Returns (sxbinary, color_binary): the x-gradient binary mask, and an RGB
    visualization stacking the gradient mask (green) and S mask (blue).
    """
    img = np.copy(img)
    # Convert to HLS; the L channel drives the gradient, S the color threshold.
    hls = cv2.cvtColor(img, cv2.COLOR_RGB2HLS)
    l_channel = hls[:, :, 1]
    s_channel = hls[:, :, 2]
    # Sobel x on lightness accentuates near-vertical lane edges.
    sobelx = cv2.Sobel(l_channel, cv2.CV_64F, 1, 0)
    abs_sobelx = np.absolute(sobelx)
    scaled_sobel = np.uint8(255*abs_sobelx/np.max(abs_sobelx))
    # Threshold x gradient
    sxbinary = np.zeros_like(scaled_sobel)
    sxbinary[(scaled_sobel >= sx_thresh[0]) & (scaled_sobel <= sx_thresh[1])] = 1
    # BUG FIX: this used to call s_binary(hls, ...), which converts its input
    # RGB->HLS a second time — thresholding garbage. Threshold S directly.
    s_bin = np.zeros_like(s_channel)
    s_bin[(s_channel > s_thresh[0]) & (s_channel <= s_thresh[1])] = 1
    # Stack each channel
    color_binary = np.dstack((np.zeros_like(sxbinary), sxbinary, s_bin)) * 255
    return sxbinary, color_binary
## Example of distortion-corrected image
img = cv2.imread('test_images/straight_lines1.jpg')
# cv2.imread returns BGR; convert to RGB so matplotlib shows true colors.
# NOTE(review): the name 'gray' is misleading — this is a color (RGB) image.
gray = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
undistort = cal_undistort(gray)
# Side-by-side comparison: original vs. undistorted.
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(24, 9))
f.tight_layout()
ax1.imshow(gray)
ax1.set_title('Original Image', fontsize=50)
ax2.imshow(undistort)
ax2.set_title('Undistorted Image', fontsize=50)
plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
plt.savefig('output_images/undistorted_image.jpg')
plt.show()
## Unwraped image
unwarped = warper(undistort, src, dst)
# draw polylines
# Draw the src/dst quads on copies so the bare images stay usable below.
undistort_cp = undistort.copy()
unwarped_cp = unwarped.copy()
cv2.polylines(undistort_cp, np.int32([src]), True, (255,0,0), 4)
cv2.polylines(unwarped_cp, np.int32([dst]), True, (255,0,0), 4)
# Side-by-side: undistorted camera view vs. bird's-eye view.
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(24, 9))
f.tight_layout()
ax1.imshow(undistort_cp)
ax1.set_title('Undistorted Image', fontsize=50)
ax2.imshow(unwarped_cp)
ax2.set_title('Unwarped Image', fontsize=50)
plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
plt.savefig('output_images/unwarped_image.jpg')
plt.show()
## Sobel Examples
# 2x3 gallery: original plus each gradient-threshold variant.
f, axs = plt.subplots(2, 3, figsize=(30, 15))
axs = axs.ravel()
f.tight_layout()
axs[0].imshow(unwarped)
axs[0].set_title('Unwarped Image', fontsize=30)
grad_binary_x = abs_sobel_thresh(unwarped, orient='x', thresh=(20, 100))
axs[1].imshow(grad_binary_x, cmap='gray')
axs[1].set_title('Thresholded Gradient orient=x', fontsize=30)
grad_binary_y = abs_sobel_thresh(unwarped, orient='y', thresh=(20, 100))
axs[2].imshow(grad_binary_y, cmap='gray')
axs[2].set_title('Thresholded Gradient orient=y', fontsize=30)
mag_binary = mag_thresh(unwarped, sobel_kernel=3, mag_thresh=(30, 100))
axs[3].imshow(mag_binary, cmap='gray')
axs[3].set_title('Thresholded Magnitude xy orient', fontsize=30)
# A larger kernel smooths the direction estimate.
dir_binary = dir_threshold(unwarped, sobel_kernel=15, thresh=(0.7, 1.3))
axs[4].imshow(dir_binary, cmap='gray')
axs[4].set_title('Thresholded Grad. Dir.', fontsize=30)
comb_img = sobel_combined(unwarped, ksize=3)
axs[5].imshow(comb_img, cmap='gray')
axs[5].set_title('Thresholded Combined', fontsize=30)
# Show pictures
plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
plt.savefig('output_images/sobel_thresholds.jpg')
plt.show()
## Color Thresholds
# 5x3 gallery: gray/RGB/HLS channels and their binary thresholds.
f, axs = plt.subplots(5, 3, figsize=(24, 4 * 5))
axs = axs.ravel()
axs[0].imshow(unwarped)
axs[0].set_title("Original", fontsize=30)
gray = cv2.cvtColor(unwarped, cv2.COLOR_RGB2GRAY)
axs[1].imshow(gray, cmap='gray')
axs[1].set_title("Gray", fontsize=30)
# NOTE: 'binary' is reused by the lane-finding demos further below.
binary = binary_img(unwarped)
axs[2].imshow(binary, cmap='gray')
axs[2].set_title("Gray Binary", fontsize=30)
# Raw RGB channels.
r_image = unwarped[:,:,0]
axs[3].imshow(r_image, cmap='gray')
axs[3].set_title("R Image", fontsize=30)
g_image = unwarped[:,:,1]
axs[4].imshow(g_image, cmap='gray')
axs[4].set_title("G Image", fontsize=30)
b_image = unwarped[:,:,2]
axs[5].imshow(b_image, cmap='gray')
axs[5].set_title("B Image", fontsize=30)
# Thresholded RGB channels.
r_bin = r_binary(unwarped)
axs[6].imshow(r_bin, cmap='gray')
axs[6].set_title("R Binary", fontsize=30)
g_bin = g_binary(unwarped)
axs[7].imshow(g_bin, cmap='gray')
axs[7].set_title("G Binary", fontsize=30)
b_bin = b_binary(unwarped)
axs[8].imshow(b_bin, cmap='gray')
axs[8].set_title("B Binary", fontsize=30)
# Convert image to HLS
hls = cv2.cvtColor(unwarped, cv2.COLOR_RGB2HLS)
h_image = hls[:, :, 0]
axs[9].imshow(h_image, cmap='gray')
axs[9].set_title("H image", fontsize=30)
l_image = hls[:, :, 1]
axs[10].imshow(l_image, cmap='gray')
axs[10].set_title("L image", fontsize=30)
s_image = hls[:, :, 2]
axs[11].imshow(s_image, cmap='gray')
axs[11].set_title("S image", fontsize=30)
# Thresholded HLS channels.
h_bin = h_binary(unwarped)
axs[12].imshow(h_bin, cmap='gray')
axs[12].set_title("H binary", fontsize=30)
l_bin = l_binary(unwarped)
axs[13].imshow(l_bin, cmap='gray')
axs[13].set_title("L binary", fontsize=30)
s_bin = s_binary(unwarped)
axs[14].imshow(s_bin, cmap='gray')
axs[14].set_title("S binary", fontsize=30)
plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
plt.savefig('output_images/color_thresholds.jpg')
plt.show()
# Run the combined gradient + S-channel pipeline on the warped image.
sxbinary, color_binary = pipeline(unwarped)
# Plot the result
f, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=(24, 9))
f.tight_layout()
ax1.imshow(unwarped)
ax1.set_title('Original Image', fontsize=30)
ax2.imshow(sxbinary, cmap='gray')
ax2.set_title("Combined S channel and gradient thresholds", fontsize=30)
ax3.imshow(color_binary)
ax3.set_title('Stacked thresholds', fontsize=30)
plt.subplots_adjust(left=0., right=1, top=0.9, bottom=0.)
plt.savefig('output_images/pipeline.jpg')
plt.show()
## Finding the lanes
def hist(img):
    """Column-wise histogram of the bottom half of a 0/255 binary image.

    Normalizes pixel values to 0-1, then sums each column of the bottom
    half — lane lines are most nearly vertical closest to the car, so the
    two largest peaks mark the lane bases.
    """
    normalized = img / 255
    bottom_half = normalized[normalized.shape[0] // 2:, :]
    return np.sum(bottom_half, axis=0)
def find_lane_pixels(binary_warped):
    """Locate left/right lane pixels via a sliding-window histogram search.

    Parameters:
        binary_warped: single-channel bird's-eye binary image.

    Returns (leftx, lefty, rightx, righty, out_img) where out_img is an
    RGB visualization with the search windows drawn in green.
    """
    # Histogram of the bottom half: the two peaks mark the lane bases.
    histogram = np.sum(binary_warped[binary_warped.shape[0]//2:,:], axis=0)
    # Create an output image to draw on and visualize the result
    out_img = np.dstack((binary_warped, binary_warped, binary_warped))
    # BUG FIX: np.int was removed in NumPy 1.24+; use the builtin int instead
    # (here and everywhere below).
    midpoint = int(histogram.shape[0]//2)
    leftx_base = np.argmax(histogram[:midpoint])
    rightx_base = np.argmax(histogram[midpoint:]) + midpoint
    # HYPERPARAMETERS
    nwindows = 9    # number of sliding windows
    margin = 100    # window half-width
    minpix = 50     # min pixels found to recenter the next window
    window_height = int(binary_warped.shape[0]//nwindows)
    # x and y positions of all nonzero pixels in the image.
    nonzero = binary_warped.nonzero()
    nonzeroy = np.array(nonzero[0])
    nonzerox = np.array(nonzero[1])
    # Current window centers, updated as we climb the image.
    leftx_current = leftx_base
    rightx_current = rightx_base
    left_lane_inds = []
    right_lane_inds = []
    # Step through the windows from the bottom of the image upwards.
    for window in range(nwindows):
        win_y_low = binary_warped.shape[0] - (window+1)*window_height
        win_y_high = binary_warped.shape[0] - window*window_height
        win_xleft_low = leftx_current - margin
        win_xleft_high = leftx_current + margin
        win_xright_low = rightx_current - margin
        win_xright_high = rightx_current + margin
        # Draw the windows on the visualization image.
        cv2.rectangle(out_img,(win_xleft_low,win_y_low),
                      (win_xleft_high,win_y_high),(0,255,0), 2)
        cv2.rectangle(out_img,(win_xright_low,win_y_low),
                      (win_xright_high,win_y_high),(0,255,0), 2)
        # Nonzero pixels falling inside each window.
        good_left_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
            (nonzerox >= win_xleft_low) & (nonzerox < win_xleft_high)).nonzero()[0]
        good_right_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
            (nonzerox >= win_xright_low) & (nonzerox < win_xright_high)).nonzero()[0]
        left_lane_inds.append(good_left_inds)
        right_lane_inds.append(good_right_inds)
        # Recenter the next window on the mean x when enough pixels were found.
        if len(good_left_inds) > minpix:
            leftx_current = int(np.mean(nonzerox[good_left_inds]))
        if len(good_right_inds) > minpix:
            rightx_current = int(np.mean(nonzerox[good_right_inds]))
    # Flatten the per-window index lists.
    try:
        left_lane_inds = np.concatenate(left_lane_inds)
        right_lane_inds = np.concatenate(right_lane_inds)
    except ValueError:
        # Avoids an error if the above is not implemented fully.
        pass
    # Extract left and right line pixel positions.
    leftx = nonzerox[left_lane_inds]
    lefty = nonzeroy[left_lane_inds]
    rightx = nonzerox[right_lane_inds]
    righty = nonzeroy[right_lane_inds]
    return leftx, lefty, rightx, righty, out_img
def fit_polynomial(binary_warped):
    """Run the sliding-window search and paint the detected lane pixels.

    Left-lane pixels are colored red and right-lane pixels blue on top of
    the window visualization produced by find_lane_pixels().
    """
    leftx, lefty, rightx, righty, out_img = find_lane_pixels(binary_warped)
    # Second-order fit x = f(y) for each lane.
    left_coeffs = np.polyfit(lefty, leftx, 2)
    right_coeffs = np.polyfit(righty, rightx, 2)
    # One y value per image row.
    ploty = np.linspace(0, binary_warped.shape[0] - 1, binary_warped.shape[0])
    try:
        left_fitx = left_coeffs[0]*ploty**2 + left_coeffs[1]*ploty + left_coeffs[2]
        right_fitx = right_coeffs[0]*ploty**2 + right_coeffs[1]*ploty + right_coeffs[2]
    except TypeError:
        # Fall back to a dummy curve if the fit produced nothing usable.
        print('The function failed to fit a line!')
        left_fitx = 1*ploty**2 + 1*ploty
        right_fitx = 1*ploty**2 + 1*ploty
    ## Visualization ##
    out_img[lefty, leftx] = [255, 0, 0]
    out_img[righty, rightx] = [0, 0, 255]
    return out_img
def fit_poly(img_shape, leftx, lefty, rightx, righty):
    """Fit second-order polynomials x = f(y) to both lanes and evaluate them.

    Returns (left_fitx, right_fitx, ploty) where ploty covers every row of
    an image of shape *img_shape*.
    """
    left_coeffs = np.polyfit(lefty, leftx, 2)
    right_coeffs = np.polyfit(righty, rightx, 2)
    # One y sample per image row.
    ploty = np.linspace(0, img_shape[0] - 1, img_shape[0])
    left_fitx = left_coeffs[0] * ploty**2 + left_coeffs[1] * ploty + left_coeffs[2]
    right_fitx = right_coeffs[0] * ploty**2 + right_coeffs[1] * ploty + right_coeffs[2]
    return left_fitx, right_fitx, ploty
def search_around_poly(binary_warped, left_fit=None, right_fit=None):
    """Search for lane pixels within a margin around previous-frame fits.

    Parameters:
        binary_warped: single-channel bird's-eye binary image.
        left_fit, right_fit: 2nd-order coefficient arrays from a prior frame.
            BUG FIX: these were read implicitly from module-level globals;
            they are now explicit parameters. When omitted, the module-level
            ``left_fit``/``right_fit`` globals are still used, preserving the
            legacy behavior.

    Returns an RGB visualization of the search corridor and lane pixels, or
    the input image unchanged when no right-lane pixels are found.
    """
    if left_fit is None:
        left_fit = globals()['left_fit']
    if right_fit is None:
        right_fit = globals()['right_fit']
    # HYPERPARAMETER: half-width of the corridor around each polynomial.
    margin = 100
    # Grab activated pixels.
    nonzero = binary_warped.nonzero()
    nonzeroy = np.array(nonzero[0])
    nonzerox = np.array(nonzero[1])
    # Boolean masks selecting pixels within +/- margin of each fitted curve.
    left_lane_inds = ((nonzerox > (left_fit[0]*(nonzeroy**2) + left_fit[1]*nonzeroy +
                    left_fit[2] - margin)) & (nonzerox < (left_fit[0]*(nonzeroy**2) +
                    left_fit[1]*nonzeroy + left_fit[2] + margin)))
    right_lane_inds = ((nonzerox > (right_fit[0]*(nonzeroy**2) + right_fit[1]*nonzeroy +
                    right_fit[2] - margin)) & (nonzerox < (right_fit[0]*(nonzeroy**2) +
                    right_fit[1]*nonzeroy + right_fit[2] + margin)))
    # Extract left and right line pixel positions.
    leftx = nonzerox[left_lane_inds]
    lefty = nonzeroy[left_lane_inds]
    rightx = nonzerox[right_lane_inds]
    righty = nonzeroy[right_lane_inds]
    # Hotfix: bail out when the right lane vanished.
    # NOTE(review): this returns the single-channel input while the normal
    # path returns an RGB image — callers must cope with either shape.
    if rightx.size == 0:
        return binary_warped
    # Fit new polynomials.
    left_fitx, right_fitx, ploty = fit_poly(binary_warped.shape, leftx, lefty, rightx, righty)
    ## Visualization ##
    out_img = np.dstack((binary_warped, binary_warped, binary_warped))*255
    window_img = np.zeros_like(out_img)
    # Color in left (red) and right (blue) lane pixels.
    out_img[nonzeroy[left_lane_inds], nonzerox[left_lane_inds]] = [255, 0, 0]
    out_img[nonzeroy[right_lane_inds], nonzerox[right_lane_inds]] = [0, 0, 255]
    # Build the search-corridor polygons in a format cv2.fillPoly accepts.
    left_line_window1 = np.array([np.transpose(np.vstack([left_fitx-margin, ploty]))])
    left_line_window2 = np.array([np.flipud(np.transpose(np.vstack([left_fitx+margin,
                        ploty])))])
    left_line_pts = np.hstack((left_line_window1, left_line_window2))
    right_line_window1 = np.array([np.transpose(np.vstack([right_fitx-margin, ploty]))])
    right_line_window2 = np.array([np.flipud(np.transpose(np.vstack([right_fitx+margin,
                        ploty])))])
    right_line_pts = np.hstack((right_line_window1, right_line_window2))
    # Draw the corridors, then blend them translucently over the pixels.
    cv2.fillPoly(window_img, np.int_([left_line_pts]), (0,255, 0))
    cv2.fillPoly(window_img, np.int_([right_line_pts]), (0,255, 0))
    result = cv2.addWeighted(out_img, 1, window_img, 0.3, 0)
    return result
print('Line Lanes functions loaded')
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(24, 9))
f.tight_layout()
# Visualize the resulting histogram
# 'binary' here is the warped gray binary produced in the color-threshold demo.
ax1.imshow(binary, cmap='gray')
ax1.set_title("Warped image", fontsize=30)
# Create histogram of image binary activations
histogram = hist(binary)
ax2.plot(histogram)
ax2.set_title("Histogram", fontsize=30)
plt.savefig('output_images/histogram.jpg')
plt.show()
## Finding Sliding Window
# Sliding-window search + polynomial fit, visualized next to the input.
sliding_window_img = fit_polynomial(binary)
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(24, 9))
ax1.imshow(binary, cmap='gray')
ax1.set_title("Warped image", fontsize=30)
ax2.imshow(sliding_window_img)
ax2.set_title("Sliding window", fontsize=30)
# View your output
plt.savefig('output_images/sliding_window.jpg')
plt.show()
## Search from Prior
# Polynomial fit values from the previous frame
# These module-level fits are the ones search_around_poly() reads implicitly.
leftx, lefty, rightx, righty, out_img = find_lane_pixels(binary)
left_fit = np.polyfit(lefty, leftx, 2)
right_fit = np.polyfit(righty, rightx, 2)
# Run image through the pipeline
# Note that in your project, you'll also want to feed in the previous fits
result = search_around_poly(binary)
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(24, 9))
ax1.imshow(binary, cmap='gray')
ax1.set_title("Warped image")
ax2.imshow(result)
ax2.set_title("Polynomial")
# View your output
plt.savefig('output_images/polynomial.jpg')
plt.show()
## Draw lines
def measure_curvature(ploty, leftx, lefty, rightx, righty):
    '''
    Calculates the curvature of polynomial functions in meters.

    Returns (left_curverad, right_curverad, ym_per_pix, xm_per_pix).
    '''
    # Pixel-to-meter conversions (assumed standard US-highway geometry).
    ym_per_pix = 30 / 720  # meters per pixel in y dimension
    xm_per_pix = 3.7 / 700  # meters per pixel in x dimension
    # Refit both polynomials in metric space.
    left_coeffs = np.polyfit(lefty * ym_per_pix, leftx * xm_per_pix, 2)
    right_coeffs = np.polyfit(righty * ym_per_pix, rightx * xm_per_pix, 2)
    # Evaluate curvature at the bottom of the image (closest to the car).
    y_eval = np.max(ploty)

    def _radius(coeffs):
        # R = (1 + (2*a*y + b)^2)^1.5 / |2*a|, with y converted to meters.
        first_deriv = 2*coeffs[0]*y_eval*ym_per_pix + coeffs[1]
        return ((1 + first_deriv**2)**1.5) / np.absolute(2*coeffs[0])

    return _radius(left_coeffs), _radius(right_coeffs), ym_per_pix, xm_per_pix
print("Measure curvature loaded")
## Process image
def process_image(img, text=True):
    """Full lane-detection pipeline for a single RGB frame.

    Undistorts, warps to a bird's-eye view, thresholds, finds lane pixels,
    fits polynomials, then projects the lane polygon back onto the frame,
    annotated with curvature and center offset when *text* is True.
    Relies on the module-level ``src``, ``dst`` and ``w`` globals.
    Returns the annotated frame (or the input unchanged when no left-lane
    pixels are found).
    """
    img = np.copy(img)
    undistorted = cal_undistort(img)
    # Inverse transform to project the detected lane back onto the frame.
    # (The forward matrix M computed here previously was unused — warper()
    # builds its own; removed along with the unused color_warp/out_img.)
    Minv = cv2.getPerspectiveTransform(dst, src)
    warped = warper(undistorted, src, dst)
    binary = binary_img(warped)
    leftx, lefty, rightx, righty, _ = find_lane_pixels(binary)
    # hot fix! Skip frames where no left-lane pixels were found.
    if leftx.size == 0:
        return img
    left_fitx, right_fitx, ploty = fit_poly(binary.shape, leftx, lefty, rightx, righty)
    left_curverad, right_curverad, ym_per_pix, xm_per_pix = measure_curvature(ploty, leftx, lefty, rightx, righty)
    # Blank canvas (same shape as the frame) to draw the lane polygon on.
    warp_zero = np.zeros_like(img).astype(np.uint8)
    # Lane polygon: left curve top-to-bottom, right curve bottom-to-top.
    pts_left = np.array([np.transpose(np.vstack([left_fitx, ploty]))])
    pts_right = np.array([np.flipud(np.transpose(np.vstack([right_fitx, ploty])))])
    pts = np.hstack((pts_left, pts_right))
    # Fill with green between lanes
    cv2.fillPoly(warp_zero, np.int_([pts]), (0, 255, 0))
    window_img = cv2.warpPerspective(warp_zero, Minv, (img.shape[1], img.shape[0]))
    # Combine the result with the original image
    result = cv2.addWeighted(img, 1, window_img, 0.3, 0)
    left_curv_str = 'Left = ' + str(np.round(left_curverad, 2)) + ' m'
    right_curv_str = 'Right = '+ str(np.round(right_curverad, 2)) + ' m'
    # Offset of the lane center from the image center, converted to meters.
    left_mean = np.mean(leftx)
    right_mean = np.mean(rightx)
    camera_pos = (binary.shape[1]/2)-np.mean([left_mean, right_mean])
    dist_offset = np.round(camera_pos*xm_per_pix,2)
    str_offset = 'Offset from center: ' + str(dist_offset) + ' m.'
    if text == True:
        font = cv2.FONT_HERSHEY_COMPLEX
        cv2.putText(result, left_curv_str, (30, 60), font, 1, (255,0,0), 2)
        cv2.putText(result, right_curv_str, (w-360, 60), font, 1, (0,0,255), 2)
        cv2.putText(result, str_offset, (400, 100), font, 1, (255,255,255), 2)
    return result
print("Load processing")
f, axs = plt.subplots(4, 2, figsize=(30, 30))
axs = axs.ravel()
f.tight_layout()
i = 0
# Run the full pipeline on every test image and tile the results.
for im in glob.glob('test_images/*.jpg'):
    img1= mpimg.imread(im)
    result = process_image(img1, text=False)
    axs[i].imshow(result)
    axs[i].set_title("test {}".format(i))
    i += 1
plt.savefig('output_images/test_images.jpg')
plt.show()
# Process the project video frame-by-frame and write the annotated result.
clip = VideoFileClip("project_video.mp4")
write_output_result = 'output_videos/result.mp4'
write_clip_result = clip.fl_image(process_image)
# Notebook cell magic: time the video render.
%time write_clip_result.write_videofile(write_output_result, audio=False)
# Inline HTML5 player for the rendered video.
HTML("""
<video width="960" height="540" controls>
<source src="{}" type="video/mp4">
</video>
""".format(write_output_result))